# Load the historical ADANIPORTS (Adani Ports) daily OHLCV price data from CSV
import pandas as pd
df=pd.read_csv("ADANIPORTS.csv")
# Preview the first five rows
df.head()
| Date | Symbol | Series | Prev Close | Open | High | Low | Last | Close | VWAP | Volume | Turnover | Trades | Deliverable Volume | %Deliverble | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 2007-11-27 | MUNDRAPORT | EQ | 440.00 | 770.00 | 1050.00 | 770.0 | 959.0 | 962.90 | 984.72 | 27294366 | 2.687719e+15 | NaN | 9859619 | 0.3612 |
| 1 | 2007-11-28 | MUNDRAPORT | EQ | 962.90 | 984.00 | 990.00 | 874.0 | 885.0 | 893.90 | 941.38 | 4581338 | 4.312765e+14 | NaN | 1453278 | 0.3172 |
| 2 | 2007-11-29 | MUNDRAPORT | EQ | 893.90 | 909.00 | 914.75 | 841.0 | 887.0 | 884.20 | 888.09 | 5124121 | 4.550658e+14 | NaN | 1069678 | 0.2088 |
| 3 | 2007-11-30 | MUNDRAPORT | EQ | 884.20 | 890.00 | 958.00 | 890.0 | 929.0 | 921.55 | 929.17 | 4609762 | 4.283257e+14 | NaN | 1260913 | 0.2735 |
| 4 | 2007-12-03 | MUNDRAPORT | EQ | 921.55 | 939.75 | 995.00 | 922.0 | 980.0 | 969.30 | 965.65 | 2977470 | 2.875200e+14 | NaN | 816123 | 0.2741 |
# Inspect dtypes and non-null counts (Date is still a string here; Trades has NaNs)
df.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 3322 entries, 0 to 3321 Data columns (total 15 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Date 3322 non-null object 1 Symbol 3322 non-null object 2 Series 3322 non-null object 3 Prev Close 3322 non-null float64 4 Open 3322 non-null float64 5 High 3322 non-null float64 6 Low 3322 non-null float64 7 Last 3322 non-null float64 8 Close 3322 non-null float64 9 VWAP 3322 non-null float64 10 Volume 3322 non-null int64 11 Turnover 3322 non-null float64 12 Trades 2456 non-null float64 13 Deliverable Volume 3322 non-null int64 14 %Deliverble 3322 non-null float64 dtypes: float64(10), int64(2), object(3) memory usage: 389.4+ KB
# Convert the Date column from string to datetime64 for time-series handling
df['Date']=pd.to_datetime(df['Date'])
# Re-check dtypes to confirm the conversion took effect
df.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 3322 entries, 0 to 3321 Data columns (total 15 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Date 3322 non-null datetime64[ns] 1 Symbol 3322 non-null object 2 Series 3322 non-null object 3 Prev Close 3322 non-null float64 4 Open 3322 non-null float64 5 High 3322 non-null float64 6 Low 3322 non-null float64 7 Last 3322 non-null float64 8 Close 3322 non-null float64 9 VWAP 3322 non-null float64 10 Volume 3322 non-null int64 11 Turnover 3322 non-null float64 12 Trades 2456 non-null float64 13 Deliverable Volume 3322 non-null int64 14 %Deliverble 3322 non-null float64 dtypes: datetime64[ns](1), float64(10), int64(2), object(2) memory usage: 389.4+ KB
# Count missing values per column (only Trades has NaNs: 866)
df.isnull().sum()
Date 0 Symbol 0 Series 0 Prev Close 0 Open 0 High 0 Low 0 Last 0 Close 0 VWAP 0 Volume 0 Turnover 0 Trades 866 Deliverable Volume 0 %Deliverble 0 dtype: int64
# Mean of the non-null Trades values, used below to impute the NaNs
val=df['Trades'].mean()
val
44922.58835504886
# Impute the missing Trades values with the column mean (the 866 NaNs are the
# earliest rows of the series, where trade counts were not recorded).
# NOTE(review): the mean is computed from later data, so this leaks future
# information into early rows — confirm this is acceptable for the analysis.
df['Trades']=df['Trades'].fillna(val)
# Verify that no NaNs remain in Trades
df['Trades'].isnull().sum()
0
# Summary statistics (count, mean, std, min, quartiles, max) for the numeric columns
df.describe()
| Prev Close | Open | High | Low | Last | Close | VWAP | Volume | Turnover | Trades | Deliverable Volume | %Deliverble | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 3322.000000 | 3322.000000 | 3322.000000 | 3322.000000 | 3322.000000 | 3322.000000 | 3322.000000 | 3.322000e+03 | 3.322000e+03 | 3.322000e+03 | 3.322000e+03 | 3322.000000 |
| mean | 344.114314 | 344.763019 | 351.608007 | 337.531969 | 344.239539 | 344.201626 | 344.853182 | 2.954564e+06 | 1.070144e+14 | 4.492259e+04 | 1.207441e+06 | 0.445899 |
| std | 192.936882 | 193.619992 | 198.617808 | 188.676614 | 193.187813 | 193.045886 | 193.841305 | 4.104227e+06 | 2.625564e+14 | 4.318821e+04 | 1.398640e+06 | 0.160496 |
| min | 108.000000 | 108.000000 | 110.450000 | 105.650000 | 108.000000 | 108.000000 | 108.340000 | 1.236600e+04 | 2.415857e+11 | 3.660000e+02 | 5.383000e+03 | 0.067000 |
| 25% | 164.312500 | 164.850000 | 168.000000 | 161.600000 | 164.075000 | 164.312500 | 164.855000 | 7.493682e+05 | 1.817650e+13 | 2.643650e+04 | 3.212005e+05 | 0.332900 |
| 50% | 324.700000 | 325.750000 | 331.275000 | 319.850000 | 325.000000 | 324.700000 | 325.765000 | 2.007292e+06 | 5.836041e+13 | 4.492259e+04 | 8.132775e+05 | 0.445650 |
| 75% | 400.912500 | 401.000000 | 407.187500 | 395.000000 | 400.912500 | 400.912500 | 400.607500 | 3.636883e+06 | 1.158526e+14 | 4.585475e+04 | 1.605528e+06 | 0.555850 |
| max | 1307.450000 | 1310.250000 | 1324.000000 | 1270.000000 | 1308.000000 | 1307.450000 | 1302.150000 | 9.771788e+07 | 8.160988e+15 | 1.205984e+06 | 2.241652e+07 | 0.979800 |
import plotly.express as px
# Visualize pairwise relationships and trends among the price columns,
# colored by ticker symbol.
price_dims = ["Open", "High", "Low", "Last", "Close", "VWAP"]
fig = px.scatter_matrix(df, dimensions=price_dims, color="Symbol")
fig.show()
# Show the counts/combinations of the categorical columns (Symbol, Series).
# Fix: the original passed the entire DataFrame, which would treat every
# continuous price column and the Date column as a "category" — meaningless
# and extremely slow. Plot only the true categorical columns.
fig = px.parallel_categories(df, dimensions=['Symbol', 'Series'])
fig.show()
from sklearn.preprocessing import LabelEncoder
categorical_features = ['Symbol', 'Series']
# Fit one LabelEncoder per categorical column and replace the column with its
# integer codes; the fitted encoders are kept so the codes can be mapped back
# to the original labels later if needed.
label_encoders = {}
for col in categorical_features:
    encoder = LabelEncoder()
    df[col] = encoder.fit_transform(df[col])
    label_encoders[col] = encoder
df.head()
| Date | Symbol | Series | Prev Close | Open | High | Low | Last | Close | VWAP | Volume | Turnover | Trades | Deliverable Volume | %Deliverble | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 2007-11-27 | 1 | 0 | 440.00 | 770.00 | 1050.00 | 770.0 | 959.0 | 962.90 | 984.72 | 27294366 | 2.687719e+15 | 44922.588355 | 9859619 | 0.3612 |
| 1 | 2007-11-28 | 1 | 0 | 962.90 | 984.00 | 990.00 | 874.0 | 885.0 | 893.90 | 941.38 | 4581338 | 4.312765e+14 | 44922.588355 | 1453278 | 0.3172 |
| 2 | 2007-11-29 | 1 | 0 | 893.90 | 909.00 | 914.75 | 841.0 | 887.0 | 884.20 | 888.09 | 5124121 | 4.550658e+14 | 44922.588355 | 1069678 | 0.2088 |
| 3 | 2007-11-30 | 1 | 0 | 884.20 | 890.00 | 958.00 | 890.0 | 929.0 | 921.55 | 929.17 | 4609762 | 4.283257e+14 | 44922.588355 | 1260913 | 0.2735 |
| 4 | 2007-12-03 | 1 | 0 | 921.55 | 939.75 | 995.00 | 922.0 | 980.0 | 969.30 | 965.65 | 2977470 | 2.875200e+14 | 44922.588355 | 816123 | 0.2741 |
from sklearn.preprocessing import MinMaxScaler
# Numerical columns to rescale into the [0, 1] range
numerical_features = ['Prev Close','Open', 'High', 'Low','Last','Close', 'Volume','VWAP','Turnover','Trades']
# Create the scaler; keep the reference so later cells can transform /
# inverse-transform new values with the same min/max statistics.
scaler = MinMaxScaler()
# fit_transform is the idiomatic one-step replacement for the original
# separate fit() + transform() calls (identical result).
df[numerical_features] = scaler.fit_transform(df[numerical_features])
# Persist the preprocessed (encoded + normalized) stock DataFrame
df.to_csv('preprocessed_stock_dataset.csv', index=False)
from sklearn.feature_selection import SelectKBest, f_classif
# NOTE(review): SelectKBest/f_classif are imported but never used in this
# section — possibly intended for a later feature-selection step; confirm.
# Reload the preprocessed dataset from disk to verify what was saved.
stocks_df=pd.read_csv("preprocessed_stock_dataset.csv")
stocks_df.head()
| Date | Symbol | Series | Prev Close | Open | High | Low | Last | Close | VWAP | Volume | Turnover | Trades | Deliverable Volume | %Deliverble | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 2007-11-27 | 1 | 0 | 0.276794 | 0.550634 | 0.774216 | 0.570576 | 0.709167 | 0.712743 | 0.734103 | 0.279227 | 0.329318 | 0.036957 | 9859619 | 0.3612 |
| 1 | 2007-11-28 | 1 | 0 | 0.712743 | 0.728634 | 0.724774 | 0.659896 | 0.647500 | 0.655217 | 0.697799 | 0.046763 | 0.052818 | 0.036957 | 1453278 | 0.3172 |
| 2 | 2007-11-29 | 1 | 0 | 0.655217 | 0.666251 | 0.662766 | 0.631554 | 0.649167 | 0.647130 | 0.653161 | 0.052318 | 0.055733 | 0.036957 | 1069678 | 0.2088 |
| 3 | 2007-11-30 | 1 | 0 | 0.647130 | 0.650447 | 0.698406 | 0.673638 | 0.684167 | 0.678269 | 0.687572 | 0.047054 | 0.052456 | 0.036957 | 1260913 | 0.2735 |
| 4 | 2007-12-03 | 1 | 0 | 0.678269 | 0.691828 | 0.728895 | 0.701121 | 0.726667 | 0.718079 | 0.718129 | 0.030347 | 0.035202 | 0.036957 | 816123 | 0.2741 |
# Correlation among the price variables (candidate predictors vs. Close)
price_cols = ['Close','Open','High','Low','Last','Prev Close']
dfr = df[price_cols]
cor_matrix = dfr.corr()
cor_matrix
| Close | Open | High | Low | Last | Prev Close | |
|---|---|---|---|---|---|---|
| Close | 1.000000 | 0.997934 | 0.999058 | 0.998904 | 0.999961 | 0.995193 |
| Open | 0.997934 | 1.000000 | 0.998697 | 0.998741 | 0.997855 | 0.997228 |
| High | 0.999058 | 0.998697 | 1.000000 | 0.997991 | 0.999045 | 0.995592 |
| Low | 0.998904 | 0.998741 | 0.997991 | 1.000000 | 0.998788 | 0.996221 |
| Last | 0.999961 | 0.997855 | 0.999045 | 0.998788 | 1.000000 | 0.995130 |
| Prev Close | 0.995193 | 0.997228 | 0.995592 | 0.996221 | 0.995130 | 1.000000 |
import seaborn as sns
import matplotlib.pyplot as plt
# Every independent variable is very highly correlated with the dependent
# variable (Close), so a linear model should fit extremely well — though the
# predictors are also highly correlated with each other.
sns.heatmap(cor_matrix, annot=False, cmap='coolwarm')
plt.title('Correlation of Variables ')
Text(0.5, 1.0, 'Correlation of Variables ')
#Checking the relationship of each variable with Dependent variable
# Fix: pass column names so seaborn receives 1-D vectors — df[['Close']] is a
# 2-D single-column DataFrame, which regplot rejects.
sns.regplot(x = 'Close', y = 'Open', data = df)
plt.title("Relationship of Closing and Opening price")
Text(0.5, 1.0, 'Relationship of Closing and Opening price')
#Relationship of Closing and High price
# Fix: use column names (1-D) instead of 2-D single-column DataFrames
sns.regplot(x = 'Close', y = 'High', data = df)
plt.title("Relationship of Closing and High price")
Text(0.5, 1.0, 'Relationship of Closing and High price')
# Relationship of Closing and Low price
# Fix: use column names (1-D) instead of 2-D single-column DataFrames
sns.regplot(x = 'Close', y = 'Low', data = df)
plt.title("Relationship of Closing and Low price")
Text(0.5, 1.0, 'Relationship of Closing and Low price')
# Relationship of Closing and Last price
# Fix: use column names (1-D) instead of 2-D single-column DataFrames
sns.regplot(x = 'Close', y = 'Last', data = df)
plt.title("Relationship of Closing and Last price")
Text(0.5, 1.0, 'Relationship of Closing and Last price')
# Relationship of Closing and Previous close price
# Fixes: use column names (1-D) instead of 2-D single-column DataFrames, and
# correct the "Previouse" typo in the user-facing plot title.
sns.regplot(x = 'Close', y = 'Prev Close', data = df)
plt.title("Relationship of Closing and Previous close price")
Text(0.5, 1.0, 'Relationship of Closing and Previouse close price')
# Dependent variable: the (normalized) closing price
y = df[['Close']]
# Independent variables: the other normalized price columns
x = df[['Open','High','Low','Last','Prev Close']]
# Splitting the dependent and independent variables into training and testing
# sets (70% train / 30% test; fixed random_state for reproducibility)
from sklearn.model_selection import train_test_split
x_train,x_test,y_train,y_test = train_test_split(x,y,test_size = 0.3 , random_state = 0)
# Dimensions of the training features: (rows, columns)
x_train.shape
(2325, 5)
# Dimensions of the testing features: (rows, columns)
x_test.shape
(997, 5)
from sklearn.linear_model import LinearRegression
# Fit an ordinary least-squares linear regression on the training split
lm = LinearRegression()
lm.fit(x_train,y_train)
# Learned parameters: intercept plus one coefficient per feature
# (Open, High, Low, Last, Prev Close — same order as x)
print('Intercept: ' , lm.intercept_)
print('coefficient : ' , lm.coef_)
Intercept: [8.36016716e-05] coefficient : [[-0.06094679 0.07256409 0.12031454 0.87010004 -0.00417572]]
# Predict closing prices for the held-out test set
yhat = lm.predict(x_test)
# Peek at the first ten predictions (still on the normalized [0, 1] scale)
print(yhat[:10])
[[0.00631646] [0.03618599] [0.19698612] [0.1123224 ] [0.26726579] [0.0388566 ] [0.64792116] [0.20915232] [0.13642555] [0.03575146]]
# Side-by-side dump of the actual test targets and the model predictions
print({'Actual': y_test, 'Predicted': yhat})
{'Actual': Close
1182 0.007170
1396 0.036558
1773 0.197716
2057 0.111676
2520 0.267498
... ...
2300 0.168661
1379 0.035308
2100 0.062404
746 0.031431
1917 0.188920
[997 rows x 1 columns], 'Predicted': array([[0.00631646],
[0.03618599],
[0.19698612],
[0.1123224 ],
[0.26726579],
[0.0388566 ],
[0.64792116],
[0.20915232],
[0.13642555],
[0.03575146],
[0.37425895],
[0.20865564],
[0.16150065],
[0.39065828],
[0.14656693],
[0.13198931],
[0.17027852],
[0.96885307],
[0.04063208],
[0.12415679],
[0.4953962 ],
[0.13123135],
[0.21967915],
[0.52919871],
[0.20524422],
[0.53460245],
[0.15520969],
[0.14887643],
[0.03843309],
[0.17968596],
[0.06423629],
[0.36534278],
[0.22687678],
[0.38810698],
[0.79146577],
[0.3113638 ],
[0.09609237],
[0.15514092],
[0.12226543],
[0.03193848],
[0.19834181],
[0.23657218],
[0.21702111],
[0.01265987],
[0.15370139],
[0.23154348],
[0.25197681],
[0.37565874],
[0.1783243 ],
[0.01895453],
[0.35036133],
[0.02758339],
[0.1991408 ],
[0.02474008],
[0.51871177],
[0.56928375],
[0.17451735],
[0.0117702 ],
[0.16090162],
[0.21816103],
[0.04765847],
[0.1683212 ],
[0.17758836],
[0.41719872],
[0.01863105],
[0.00565635],
[0.03016971],
[0.19415862],
[0.33599861],
[0.20662479],
[0.24013376],
[0.5960684 ],
[0.20963561],
[0.10095272],
[0.24107905],
[0.21390937],
[0.22384399],
[0.03344625],
[0.19030603],
[0.22675633],
[0.03837125],
[0.16969676],
[0.14357878],
[0.1307556 ],
[0.19949509],
[0.15177525],
[0.52478257],
[0.03568332],
[0.08281341],
[0.29042668],
[0.24917455],
[0.1538589 ],
[0.02923768],
[0.03945552],
[0.17086518],
[0.17767154],
[0.0201706 ],
[0.35962669],
[0.1492096 ],
[0.24070797],
[0.08177031],
[0.02426273],
[0.17004626],
[0.568165 ],
[0.180839 ],
[0.03526899],
[0.45237956],
[0.52311531],
[0.42228286],
[0.65950142],
[0.03040722],
[0.19397777],
[0.50544763],
[0.25643237],
[0.24730778],
[0.1964224 ],
[0.44257502],
[0.1383764 ],
[0.26284768],
[0.33487687],
[0.60647319],
[0.57541777],
[0.21021293],
[0.21528407],
[0.38980696],
[0.21824086],
[0.00313696],
[0.01535143],
[0.3750666 ],
[0.16526596],
[0.12900725],
[0.21249343],
[0.2477313 ],
[0.78268254],
[0.13431252],
[0.04341983],
[0.04771664],
[0.25681342],
[0.03236772],
[0.19556386],
[0.13645743],
[0.03821074],
[0.49598491],
[0.38720068],
[0.44144143],
[0.35475718],
[0.17547383],
[0.23077774],
[0.01852798],
[0.04653258],
[0.12205291],
[0.24885978],
[0.22740621],
[0.25640822],
[0.02228056],
[0.23013611],
[0.13283088],
[0.02898219],
[0.19276384],
[0.22549146],
[0.52869714],
[0.21410412],
[0.51707311],
[0.04366455],
[0.15132975],
[0.27438502],
[0.27370241],
[0.0267687 ],
[0.23469743],
[0.21663428],
[0.02135113],
[0.24225491],
[0.17836109],
[0.03848665],
[0.10145266],
[0.01677094],
[0.1676515 ],
[0.24307194],
[0.39128245],
[0.1752277 ],
[0.15654077],
[0.34973253],
[0.02425267],
[0.140317 ],
[0.81316745],
[0.08283851],
[0.21644378],
[0.02211633],
[0.22037742],
[0.22833886],
[0.19392128],
[0.03312693],
[0.0484279 ],
[0.13534528],
[0.40970907],
[0.16720409],
[0.12101175],
[0.05836124],
[0.03487509],
[0.23566549],
[0.23457717],
[0.03545319],
[0.03237176],
[0.10452347],
[0.19777776],
[0.23269168],
[0.03880861],
[0.17820407],
[0.2392903 ],
[0.2173824 ],
[0.49642385],
[0.16888095],
[0.0322742 ],
[0.2349573 ],
[0.22288568],
[0.36751284],
[0.16762526],
[0.24968048],
[0.04449034],
[0.26098143],
[0.19977794],
[0.48150292],
[0.53403205],
[0.20019838],
[0.21948555],
[0.13249368],
[0.20777699],
[0.01333307],
[0.13351462],
[0.33582526],
[0.03524104],
[0.18028708],
[0.01277167],
[0.00917215],
[0.04108976],
[0.21230934],
[0.0743793 ],
[0.13695754],
[0.03832485],
[0.00380495],
[0.04134986],
[0.01533353],
[0.42481111],
[0.84300653],
[0.20740094],
[0.35672342],
[0.06346949],
[0.22780904],
[0.06043245],
[0.03052817],
[0.18767611],
[0.53109524],
[0.10076164],
[0.03637362],
[0.23548523],
[0.23123542],
[0.27199893],
[0.12841862],
[0.03673378],
[0.13467821],
[0.22584213],
[0.57236236],
[0.16018587],
[0.04984412],
[0.18474058],
[0.17841049],
[0.34447466],
[0.58689815],
[0.17053224],
[0.23307498],
[0.0157414 ],
[0.17235784],
[0.54034996],
[0.13345953],
[0.17214542],
[0.27407989],
[0.01389338],
[0.41347645],
[0.57687609],
[0.26273956],
[0.11672372],
[0.10967751],
[0.16849722],
[0.1938245 ],
[0.19478532],
[0.13703438],
[0.10239839],
[0.01666063],
[0.04354209],
[0.36463355],
[0.03387371],
[0.03678561],
[0.04356809],
[0.58791196],
[0.03553374],
[0.12815627],
[0.2979276 ],
[0.19582141],
[0.12870068],
[0.01830071],
[0.02292191],
[0.22808684],
[0.17456418],
[0.48188798],
[0.24378583],
[0.31563179],
[0.40141161],
[0.03289973],
[0.56504378],
[0.03437274],
[0.19306084],
[0.16276887],
[0.01051347],
[0.013521 ],
[0.02883222],
[0.54929628],
[0.53692565],
[0.3966008 ],
[0.53765013],
[0.01442943],
[0.18019162],
[0.1032852 ],
[0.20504873],
[0.2214555 ],
[0.03900341],
[0.32002781],
[0.0357151 ],
[0.08009523],
[0.40159807],
[0.57235462],
[0.53933825],
[0.21243895],
[0.10352348],
[0.14830547],
[0.37004928],
[0.03868456],
[0.17302852],
[0.03429542],
[0.21580065],
[0.45691889],
[0.22578932],
[0.50538876],
[0.38989577],
[0.17522302],
[0.01350034],
[0.18212064],
[0.23090583],
[0.01950843],
[0.22753915],
[0.25075994],
[0.14466658],
[0.62544832],
[0.22032788],
[0.55236145],
[0.23302473],
[0.03549564],
[0.21815858],
[0.03906797],
[0.15159004],
[0.09093585],
[0.19429157],
[0.04299759],
[0.2549043 ],
[0.26977594],
[0.23708347],
[0.29623998],
[0.12809951],
[0.21973869],
[0.19781101],
[0.13091528],
[0.04060656],
[0.17340592],
[0.55806167],
[0.21355512],
[0.01413549],
[0.34689466],
[0.64807696],
[0.1375856 ],
[0.14148548],
[0.1806938 ],
[0.11364873],
[0.03760013],
[0.13164149],
[0.22721902],
[0.46277768],
[0.47296099],
[0.13519828],
[0.23977948],
[0.50816046],
[0.23547144],
[0.19999391],
[0.37360037],
[0.22812505],
[0.36251632],
[0.40321568],
[0.04771858],
[0.07934227],
[0.26496283],
[0.61216541],
[0.03349433],
[0.24376274],
[0.12743493],
[0.27442475],
[0.23315965],
[0.00609073],
[0.51373086],
[0.20269984],
[0.06559808],
[0.1612847 ],
[0.23668129],
[0.13821659],
[0.04191234],
[0.03938765],
[0.2237211 ],
[0.22961531],
[0.01531133],
[0.24310852],
[0.7985166 ],
[0.25039681],
[0.23615388],
[0.21257427],
[0.18109292],
[0.17218454],
[0.1677702 ],
[0.06148533],
[0.58300859],
[0.0291209 ],
[0.02376124],
[0.57594678],
[0.21812911],
[0.0325686 ],
[0.04687142],
[0.0257667 ],
[0.1959138 ],
[0.26967327],
[0.16341233],
[0.03184531],
[0.2307826 ],
[0.50896944],
[0.23611554],
[0.03684314],
[0.13320682],
[0.24671208],
[0.17701277],
[0.02377867],
[0.13042072],
[0.0451477 ],
[0.1821152 ],
[0.51915896],
[0.02341315],
[0.1878597 ],
[0.18349337],
[0.40806186],
[0.25429005],
[0.13485564],
[0.2378939 ],
[0.03285621],
[0.38406982],
[0.03950152],
[0.3592562 ],
[0.12331696],
[0.20425208],
[0.03290947],
[0.04207033],
[0.03398164],
[0.59666244],
[0.22377191],
[0.03329026],
[0.56495946],
[0.08887974],
[0.99870854],
[0.27446927],
[0.16133798],
[0.23370095],
[0.42712907],
[0.12900024],
[0.01809855],
[0.0442792 ],
[0.16431731],
[0.45163874],
[0.01982523],
[0.08525896],
[0.23444171],
[0.13393699],
[0.29041509],
[0.01344748],
[0.21465051],
[0.1366045 ],
[0.04879617],
[0.72460595],
[0.18517756],
[0.4435234 ],
[0.19800875],
[0.24316162],
[0.29540987],
[0.5105466 ],
[0.16444849],
[0.13704244],
[0.01247467],
[0.00297634],
[0.01602711],
[0.37968103],
[0.19831696],
[0.19444974],
[0.00374118],
[0.03370688],
[0.21842096],
[0.22937231],
[0.0170442 ],
[0.01315435],
[0.08250508],
[0.47970534],
[0.14133522],
[0.01435972],
[0.24324637],
[0.0105298 ],
[0.14416776],
[0.21163192],
[0.02404608],
[0.21447689],
[0.43936631],
[0.02091156],
[0.21946719],
[0.03670419],
[0.03703443],
[0.33282378],
[0.55229886],
[0.16860672],
[0.03887409],
[0.5311642 ],
[0.513533 ],
[0.20909804],
[0.44406432],
[0.35308915],
[0.23246113],
[0.03725934],
[0.51182989],
[0.29742322],
[0.12737621],
[0.15127292],
[0.21983825],
[0.41407977],
[0.3017699 ],
[0.41091174],
[0.39051635],
[0.24169463],
[0.04389474],
[0.2218463 ],
[0.17528599],
[0.07854076],
[0.06765321],
[0.2321782 ],
[0.08113161],
[0.51613534],
[0.47816078],
[0.03681286],
[0.16987703],
[0.22541392],
[0.04091067],
[0.21680682],
[0.01369309],
[0.03256901],
[0.17335902],
[0.56586114],
[0.03653242],
[0.21198595],
[0.21556344],
[0.17191031],
[0.18325772],
[0.53313663],
[0.03076738],
[0.54553131],
[0.02338367],
[0.00654744],
[0.42131448],
[0.17542395],
[0.00363866],
[0.58204952],
[0.02555841],
[0.01267479],
[0.22022023],
[0.25386357],
[0.50936903],
[0.20855365],
[0.13131398],
[0.11535872],
[0.230599 ],
[0.44757459],
[0.20239578],
[0.23610088],
[0.12821179],
[0.08350508],
[0.17152874],
[0.03754505],
[0.80442713],
[0.00875663],
[0.11128081],
[0.10260668],
[0.18007514],
[0.44241518],
[0.39659803],
[0.89268742],
[0.06258905],
[0.18334563],
[0.03969533],
[0.13416389],
[0.04102412],
[0.59170917],
[0.07085625],
[0.19994696],
[0.16798402],
[0.17119298],
[0.18593054],
[0.18777464],
[0.02872238],
[0.04559681],
[0.09838959],
[0.24167515],
[0.03847838],
[0.1795245 ],
[0.00586669],
[0.0271578 ],
[0.20020574],
[0.01982102],
[0.3122095 ],
[0.21456528],
[0.12460479],
[0.45002439],
[0.03043369],
[0.19898985],
[0.09091221],
[0.02684196],
[0.02937739],
[0.22837606],
[0.24783303],
[0.22084475],
[0.19614924],
[0.12476349],
[0.18502265],
[0.02699796],
[0.11810401],
[0.2161299 ],
[0.05259768],
[0.01983078],
[0.43747678],
[0.08496842],
[0.01623855],
[0.35913259],
[0.18878917],
[0.0057946 ],
[0.21578385],
[0.40876015],
[0.1394717 ],
[0.19089375],
[0.15052814],
[0.22842144],
[0.02546109],
[0.0291328 ],
[0.02727003],
[0.06029477],
[0.10018619],
[0.23621096],
[0.251608 ],
[0.04561889],
[0.00184244],
[0.15460398],
[0.19341217],
[0.03491596],
[0.48214823],
[0.03718351],
[0.11744312],
[0.01377249],
[0.1732623 ],
[0.09409246],
[0.00230192],
[0.17906561],
[0.1852343 ],
[0.23126047],
[0.02916685],
[0.14290588],
[0.28578954],
[0.13163934],
[0.07072955],
[0.02095796],
[0.22579283],
[0.21355401],
[0.19619337],
[0.14132377],
[0.36693162],
[0.03289883],
[0.17275435],
[0.03502109],
[0.13375618],
[0.22215834],
[0.12870076],
[0.50685966],
[0.21591821],
[0.46880471],
[0.23233007],
[0.2117356 ],
[0.59330261],
[0.01131728],
[0.1308581 ],
[0.57435025],
[0.33481412],
[0.03125941],
[0.24566852],
[0.22694051],
[0.21020375],
[0.18715983],
[0.07127985],
[0.03069401],
[0.16922075],
[0.04623274],
[0.02244441],
[0.03773651],
[0.02403582],
[0.28775985],
[0.04448287],
[0.23023144],
[0.34812835],
[0.26769376],
[0.33337163],
[0.09012088],
[0.09638412],
[0.05215035],
[0.03474677],
[0.02406135],
[0.14040109],
[0.37830174],
[0.03506289],
[0.0664409 ],
[0.18971212],
[0.51537484],
[0.22957602],
[0.01257795],
[0.18081393],
[0.20898044],
[0.13768934],
[0.53192162],
[0.40041853],
[0.2176546 ],
[0.22279197],
[0.39823699],
[0.17553118],
[0.01307661],
[0.08631183],
[0.10738628],
[0.12725135],
[0.01984216],
[0.39482659],
[0.3743 ],
[0.11997875],
[0.24792046],
[0.03226214],
[0.18253588],
[0.25649122],
[0.53436927],
[0.17459944],
[0.04297405],
[0.18473021],
[0.00492713],
[0.07746523],
[0.20053243],
[0.21039762],
[0.20930788],
[0.03212312],
[0.6054445 ],
[0.20615148],
[0.48281787],
[0.05953865],
[0.02781694],
[0.22499037],
[0.04258294],
[0.2106083 ],
[0.19913291],
[0.23399008],
[0.3864704 ],
[0.20009956],
[0.0273974 ],
[0.0197919 ],
[0.56816256],
[0.23767455],
[0.01201855],
[0.02533894],
[0.18124776],
[0.36694024],
[0.00655603],
[0.18696929],
[0.01432035],
[0.37034052],
[0.52925714],
[0.01818443],
[0.35321013],
[0.0439094 ],
[0.17560812],
[0.25337828],
[0.24213057],
[0.78662873],
[0.02657215],
[0.00996607],
[0.22201852],
[0.22217741],
[0.25598304],
[0.23677759],
[0.02977411],
[0.02716604],
[0.1739243 ],
[0.0269396 ],
[0.13530912],
[0.03245438],
[0.30187538],
[0.02664737],
[0.40888013],
[0.26899274],
[0.22217967],
[0.01321624],
[0.16572694],
[0.21872993],
[0.02516097],
[0.35841849],
[0.51869703],
[0.37153566],
[0.11860613],
[0.17656327],
[0.16697356],
[0.25264557],
[0.0161542 ],
[0.23267023],
[0.22755523],
[0.1513894 ],
[0.38665251],
[0.18534293],
[0.24732085],
[0.21122728],
[0.03250462],
[0.20517808],
[0.1398048 ],
[0.36379117],
[0.03248819],
[0.27254076],
[0.50711643],
[0.07872371],
[0.51059429],
[0.18627006],
[0.40179374],
[0.63163711],
[0.24043508],
[0.02591459],
[0.24848348],
[0.03715249],
[0.15995102],
[0.51538303],
[0.48775739],
[0.27553127],
[0.01344654],
[0.02371379],
[0.01034969],
[0.18995784],
[0.14023307],
[0.39646383],
[0.0413945 ],
[0.15185393],
[0.13721937],
[0.18756077],
[0.2175824 ],
[0.21562716],
[0.01034951],
[0.02288165],
[0.08813082],
[0.42824886],
[0.02828318],
[0.53241164],
[0.21146482],
[0.01853347],
[0.01209696],
[0.02063168],
[0.19589004],
[0.03016359],
[0.3356222 ],
[0.36130752],
[0.02044048],
[0.01937215],
[0.02904321],
[0.03536371],
[0.34424877],
[0.0715901 ],
[0.15166507],
[0.02109129],
[0.25217189],
[0.02820744],
[0.18215351],
[0.0245648 ],
[0.15259594],
[0.23566465],
[0.24940612],
[0.43295321],
[0.1264385 ],
[0.03770872],
[0.03893708],
[0.03689801],
[0.22507832],
[0.01940638],
[0.5886273 ],
[0.24782423],
[0.51433644],
[0.20930968],
[0.07945867],
[0.03417024],
[0.02070685],
[0.56197505],
[0.01450307],
[0.20539653],
[0.02163001],
[0.04985627],
[0.25754531],
[0.21958295],
[0.17233183],
[0.51020806],
[0.21554184],
[0.14360092],
[0.22300042],
[0.03147814],
[0.04755838],
[0.01589131],
[0.62042699],
[0.14875783],
[0.08012583],
[0.0430636 ],
[0.15604499],
[0.01663859],
[0.24776009],
[0.01817706],
[0.15739394],
[0.30083571],
[0.19551225],
[0.2195802 ],
[0.01552356],
[0.02713362],
[0.22888161],
[0.25393342],
[0.01677282],
[0.03877754],
[0.02998498],
[0.19619915],
[0.02146127],
[0.19332286],
[0.52059046],
[0.32874806],
[0.93897335],
[0.20323023],
[0.03348429],
[0.11894916],
[0.33983201],
[0.36133244],
[0.03668596],
[0.23023587],
[0.00333818],
[0.17428017],
[0.03145703],
[0.19890644],
[0.20976383],
[0.23968301],
[0.21829186],
[0.46509558],
[0.01518898],
[0.01751971],
[0.13259039],
[0.03510039],
[0.12652641],
[0.04544215],
[0.13023667],
[0.41704855],
[0.21623784],
[0.17263613],
[0.50426849],
[0.12826793],
[0.30687446],
[0.02276549],
[0.24878343],
[0.04656642],
[0.39076027],
[0.25100204],
[0.21685007],
[0.02672718],
[0.21340187],
[0.21555318],
[0.03947591],
[0.01254886],
[0.22046853],
[0.20555926],
[0.04359588],
[0.16896081],
[0.03548178],
[0.06260621],
[0.03138168],
[0.18860133]])}
axl = sns.distplot(y_test, hist = False , color = 'r')
sns.distplot(yhat, hist = False , color = 'b', ax = axl)
C:\Users\vaibh\anaconda3\lib\site-packages\seaborn\distributions.py:2619: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `kdeplot` (an axes-level function for kernel density plots). C:\Users\vaibh\anaconda3\lib\site-packages\seaborn\distributions.py:2619: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `kdeplot` (an axes-level function for kernel density plots).
<AxesSubplot:ylabel='Density'>
from sklearn.metrics import mean_squared_error
mean_squared_error(y_test, yhat )
1.6117829755948168e-06
rsqr = lm.score(x,y)
print('R - Squared :' , rsqr)
R - Squared : 0.9999357269364687
sns.regplot( x = y_test, y = yhat , data = df)
plt.title('Relatioship of Y_test and Yhat')
plt.ylabel('y_test')
plt.xlabel('yhat')
Text(0.5, 0, 'yhat')
#if we consider the values Open : 3326 High : 3335 Low : 3302 Last : 3308 Prev Close : 3324
values = [[3326,3335,3302,3308,3324]]
closep = lm.predict(values)
print('Closing Price :', closep)
Closing Price : [[3300.98174807]]